Show the code
# Predictive Models for BoardGameGeek Ratings

# Visualize the targets pipeline dependency graph (targets only, no functions).
# NOTE(review): the source showed this call twice back-to-back; that appears to
# be a rendering/extraction artifact, so it is called once here — confirm.
targets::tar_visnetwork(targets_only = TRUE)

# Versioned pins board holding the trained vetiver models.
model_board <- pins::board_folder(
  "models",
  versioned = TRUE
)
# Complexity ("average weight") model, read back from the pins board.
averageweight_fit <- vetiver_pin_read(model_board, "bgg_averageweight")
# Community average-rating model, read back from the pins board.
average_fit <- vetiver_pin_read(model_board, "bgg_average")
# Number-of-ratings model, read back from the pins board.
usersrated_fit <- vetiver_pin_read(model_board, "bgg_usersrated")

# Plot predicted vs observed outcomes on the validation set, using the number
# of user ratings per game to set point transparency.
valid_predictions |>
  pivot_outcomes() |>
  left_join(
    games |>
      # NOTE(review): unnest_outcomes() is accessed via ::: (unexported from
      # bggUtils) — consider exporting it or vendoring it into this project.
      bggUtils:::unnest_outcomes() |>
      select(game_id, usersrated),
    by = join_by(game_id)
  ) |>
  plot_predictions(alpha = usersrated) +
  theme(legend.title = element_text())

# Validation metrics, restricted to models trained with minratings == 25.
targets_tracking_details(metrics = valid_metrics, details = details) |>
  select(
    model, minratings, outcome,
    any_of(c("rmse", "mae", "mape", "rsq", "ccc"))
  ) |>
  filter(minratings == 25) |>
  select(minratings, everything()) |>
  gt::gt() |>
  gt::tab_options(quarto.disable_processing = TRUE) |>
  gtExtras::gt_theme_espn()

# Rendered metrics from the document output, kept for reference:
#> | minratings | model         | outcome       | rmse     | mae     | mape    | rsq   | ccc   |
#> |------------|---------------|---------------|----------|---------|---------|-------|-------|
#> | 25         | glmnet        | average       | 0.683    | 0.502   | 7.557   | 0.287 | 0.471 |
#> | 25         | lightgbm      | averageweight | 0.457    | 0.347   | 19.210  | 0.665 | 0.804 |
#> | 25         | glmnet+glmnet | bayesaverage  | 0.301    | 0.174   | 2.878   | 0.413 | 0.635 |
#> | 25         | glmnet        | usersrated    | 1863.498 | 464.973 | 165.513 | 0.146 | 0.380 |
# Score upcoming games: first impute complexity (a predictor for the rating
# models), then predict the Bayesian-average ("geek") rating from the
# average-rating and users-rated models.
predictions <-
  upcoming_games |>
  impute_averageweight(model = averageweight_fit) |>
  predict_bayesaverage(
    average_model = average_fit,
    usersrated_model = usersrated_fit
  )
# Interactive table of predictions for games published from 2024 onward.
predictions |>
  # drop game 388225 (the "bah humbug" game), a known problem case
  filter(yearpublished >= 2024, game_id != 388225) |>
  predictions_dt(games = games) |>
  add_colors()